}).collect()
}
+ /// For a package, return all targets which are registered as build
+ /// dependencies for that package.
+ ///
+ /// NOTE(review): currently a stub that always returns an empty vector (see
+ /// FIXME below); callers linking custom build commands will therefore see no
+ /// build-dependency targets until this is implemented.
+ pub fn build_dep_targets(&self, _pkg: &Package)
+ -> Vec<(&'a Package, &'a Target)> {
+ // FIXME: needs implementation
+ vec![]
+ }
+
/// Gets a package for the given package id.
pub fn get_package(&self, id: &PackageId) -> &'a Package {
self.package_set.iter()
-use std::io::{fs, BufReader, USER_RWX};
+use std::fmt;
+use std::io::{fs, BufReader, USER_RWX, File};
use std::io::fs::PathExtensions;
use core::{Package, Target};
use util::{internal, ChainError};
use super::job::Work;
-use super::{process, KindHost, Context};
+use super::{fingerprint, process, KindHost, Context};
+use util::Freshness;
/// Contains the parsed output of a custom build script.
#[deriving(Clone)]
}
/// Prepares a `Work` that executes the target as a custom build script.
-pub fn prepare_execute_custom_build(pkg: &Package, target: &Target,
- cx: &mut Context)
- -> CargoResult<Work> {
- let layout = cx.layout(pkg, KindHost);
- let script_output = layout.build(pkg);
- let build_output = layout.build_out(pkg);
+pub fn prepare(pkg: &Package, target: &Target, cx: &mut Context)
+ -> CargoResult<(Work, Work, Freshness)> {
+ let (script_output, build_output, old_build_output) = {
+ let layout = cx.layout(pkg, KindHost);
+ (layout.build(pkg),
+ layout.build_out(pkg),
+ layout.proxy().old_build(pkg).join("out"))
+ };
// Building the command to execute
let to_exec = try!(cx.target_filenames(target))[0].clone();
let to_exec = script_output.join(to_exec);
- // Filling environment variables
+ // Start preparing the process to execute, starting out with some
+ // environment variables.
let profile = target.get_profile();
- let mut p = process(to_exec, pkg, cx)
+ let mut p = super::process(to_exec, pkg, cx)
.env("OUT_DIR", Some(&build_output))
.env("CARGO_MANIFEST_DIR", Some(pkg.get_manifest_path()
- .display().to_string()))
- .env("NUM_JOBS", profile.get_codegen_units().map(|n| n.to_string()))
+ .dir_path()
+ .display().to_string()))
+ .env("NUM_JOBS", Some(cx.config.jobs().to_string()))
.env("TARGET", Some(cx.target_triple()))
.env("DEBUG", Some(profile.get_debug().to_string()))
.env("OPT_LEVEL", Some(profile.get_opt_level().to_string()))
.env("PROFILE", Some(profile.get_env()));
+ // Be sure to pass along all enabled features for this package, this is the
+ // last piece of statically known information that we have.
match cx.resolve.features(pkg.get_package_id()) {
Some(features) => {
for feat in features.iter() {
None => {}
}
- // Gather the set of native dependencies that this package has
+ // Gather the set of native dependencies that this package has along with
+ // some other variables to close over.
+ //
+ // This information will be used at build-time later on to figure out which
+ // sorts of variables need to be discovered at that time.
let lib_deps = {
cx.dep_targets(pkg).iter().filter_map(|&(pkg, _)| {
pkg.get_manifest().get_links()
}).map(|s| s.to_string()).collect::<Vec<_>>()
};
-
+ let lib_name = pkg.get_manifest().get_links().map(|s| s.to_string());
+ let pkg_name = pkg.to_string();
let native_libs = cx.native_libs.clone();
- // Building command
- let pkg = pkg.to_string();
- let work = proc(desc_tx: Sender<String>) {
+ try!(fs::mkdir(&script_output, USER_RWX));
- if !build_output.exists() {
- try!(fs::mkdir(&build_output, USER_RWX).chain_error(|| {
- internal("failed to create build output directory for \
- build command")
- }))
- }
+ // Prepare the unit of "dirty work" which will actually run the custom build
+ // command.
+ //
+ // Note that this has to do some extra work just before running the command
+ // to determine extra environment variables and such.
+ let work = proc(desc_tx: Sender<String>) {
+ // Make sure that OUT_DIR exists.
+ //
+ // If we have an old build directory, then just move it into place,
+ // otherwise create it!
+ try!(if old_build_output.exists() {
+ fs::rename(&old_build_output, &build_output)
+ } else {
+ fs::mkdir(&build_output, USER_RWX)
+ }.chain_error(|| {
+ internal("failed to create script output directory for \
+ build command")
+ }));
- // loading each possible custom build output file in order to get their
- // metadata
+ // For all our native lib dependencies, pick up their metadata to pass
+ // along to this custom build command.
let mut p = p;
{
let native_libs = native_libs.lock();
}
}
+ // And now finally, run the build command itself!
desc_tx.send_opt(p.to_string()).ok();
let output = try!(p.exec_with_output().map_err(|mut e| {
e.msg = format!("Failed to run custom build command for `{}`\n{}",
- pkg, e.msg);
+ pkg_name, e.msg);
e.concrete().mark_human()
}));
- // parsing the output of the custom build script to check that it's correct
- try!(BuildOutput::parse(BufReader::new(output.output.as_slice()),
- pkg.as_slice()));
+ // After the build command has finished running, we need to be sure to
+ // remember all of its output so we can later discover precisely what it
+ // was, even if we don't run the build command again (due to freshness).
+ //
+ // This is also the location where we provide feedback into the build
+ // state informing what variables were discovered via our script as
+ // well.
+ let rdr = BufReader::new(output.output.as_slice());
+ let build_output = try!(BuildOutput::parse(rdr, pkg_name.as_slice()));
+ match lib_name {
+ Some(name) => assert!(native_libs.lock().insert(name, build_output)),
+ None => {}
+ }
- // writing the output to the right directory
- try!(fs::File::create(&script_output.join("output")).write(output.output.as_slice())
- .map_err(|e| {
- human(format!("failed to write output of custom build command: {}", e))
- }));
+ try!(File::create(&script_output.join("output"))
+ .write(output.output.as_slice()).map_err(|e| {
+ human(format!("failed to write output of custom build command: {}",
+ e))
+ }));
Ok(())
};
- Ok(work)
+ // Now that we've prepared our work-to-do, we need to prepare the fresh work
+ // itself to run when we actually end up just discarding what we calculated
+ // above.
+ //
+ // Note that the freshness calculation here is the build_cmd freshness, not
+ // target specific freshness. This is because we don't actually know what
+ // the inputs are to this command!
+ let (freshness, dirty, fresh) =
+ try!(fingerprint::prepare_build_cmd(cx, pkg, Some(target)));
+ let dirty = proc(tx: Sender<String>) { try!(work(tx.clone())); dirty(tx) };
+ let fresh = proc(tx) {
+ fresh(tx)
+ };
+
+ Ok((dirty, fresh, freshness))
}
impl BuildOutput {
let key = iter.next();
let value = iter.next();
let (key, value) = match (key, value) {
- (Some(a), Some(b)) => (a, b),
+ (Some(a), Some(b)) => (a, b.trim_right()),
// line started with `cargo:` but didn't match `key=value`
_ => return Err(human(format!("Wrong output in {}: `{}`",
whence, line)))
Ok((library_paths, library_links))
}
}
+
+/// Manual `Show` implementation for `BuildOutput` which elides the
+/// (potentially long) list of library search paths, printing `[..]` in their
+/// place while showing the linked libraries and metadata in full.
+impl fmt::Show for BuildOutput {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "BuildOutput {{ paths: [..], libs: {}, metadata: {} }}",
+               self.library_links, self.metadata)
+    }
+}
let (old_root, root) = {
let layout = cx.layout(pkg, kind);
- if target.is_example() {
+ if target.get_profile().is_custom_build() {
+ (layout.old_build(pkg), layout.build(pkg))
+ } else if target.is_example() {
(layout.old_examples().clone(), layout.examples().clone())
} else {
(layout.old_root().clone(), layout.root().clone())
///
/// The currently implemented solution is option (1), although it is planned to
/// migrate to option (2) in the near future.
-pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package)
- -> CargoResult<Preparation> {
+pub fn prepare_build_cmd(cx: &mut Context, pkg: &Package,
+ target: Option<&Target>) -> CargoResult<Preparation> {
let _p = profile::start(format!("fingerprint build cmd: {}",
pkg.get_package_id()));
let new_fingerprint = mk_fingerprint(cx, &new_fingerprint);
let is_fresh = try!(is_fresh(&old_loc, new_fingerprint.as_slice()));
- let pairs = vec![(old_loc, new_loc.clone()),
- (cx.layout(pkg, kind).old_native(pkg),
- cx.layout(pkg, kind).native(pkg))];
+ let mut pairs = vec![(old_loc, new_loc.clone())];
- let native_dir = cx.layout(pkg, kind).native(pkg);
- cx.compilation.native_dirs.insert(pkg.get_package_id().clone(), native_dir);
+ // The new custom build command infrastructure handles its own output
+ // directory as part of freshness.
+ if target.is_none() {
+ let native_dir = cx.layout(pkg, kind).native(pkg);
+ pairs.push((cx.layout(pkg, kind).old_native(pkg), native_dir.clone()));
+ cx.compilation.native_dirs.insert(pkg.get_package_id().clone(),
+ native_dir);
+ }
Ok(prepare(is_fresh, new_loc, new_fingerprint, pairs))
}
pending: HashMap<(&'a PackageId, TargetStage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
ignored: HashSet<&'a PackageId>,
+ printed: HashSet<&'a PackageId>,
}
/// A helper structure for metadata about the state of a building package.
#[deriving(Hash, PartialEq, Eq, Clone, PartialOrd, Ord, Show)]
pub enum TargetStage {
StageStart,
- StageCustomBuild,
+ StageBuildCustomBuild,
+ StageRunCustomBuild,
StageLibraries,
StageBinaries,
StageTests,
pending: HashMap::new(),
state: HashMap::new(),
ignored: HashSet::new(),
+ printed: HashSet::new(),
}
}
let amt = if njobs == 0 {1} else {njobs};
let id = pkg.get_package_id().clone();
- if stage == StageStart && !self.ignored.contains(&pkg.get_package_id()) {
- match fresh.combine(self.state[pkg.get_package_id()]) {
- Fresh => try!(config.shell().verbose(|c| {
- c.status("Fresh", pkg)
- })),
- Dirty => try!(config.shell().status("Compiling", pkg))
- }
- }
-
// While the jobs are all running, we maintain some metadata about how
// many are running, the current state of freshness (of all the combined
// jobs), and the stage to pass to finish() later on.
fresh: fresh,
});
+ let mut total_fresh = fresh.combine(self.state[pkg.get_package_id()]);
+ let mut running = Vec::new();
for (job, job_freshness) in jobs.into_iter() {
let fresh = job_freshness.combine(fresh);
+ total_fresh = total_fresh.combine(fresh);
let my_tx = self.tx.clone();
let id = id.clone();
let (desc_tx, desc_rx) = channel();
self.pool.execute(proc() {
my_tx.send((id, stage, fresh, job.run(fresh, desc_tx)));
});
- if fresh == Dirty {
- // only the first message of each job is processed
- match desc_rx.recv_opt() {
- Ok(ref msg) if msg.len() >= 1 => {
- try!(config.shell().verbose(|shell| {
- shell.status("Running", msg.as_slice())
- }));
- },
- _ => ()
- }
+ // only the first message of each job is processed
+ match desc_rx.recv_opt() {
+ Ok(msg) => running.push(msg),
+ Err(..) => {}
}
}
if njobs == 0 {
self.tx.send((id, stage, fresh, Ok(())));
}
+
+ // Print out some nice progress information
+ //
+ // This isn't super trivial because we don't want to print loads and
+ // loads of information to the console, but we also want to produce a
+ // faithful representation of what's happening. This is somewhat nuanced
+ // as a package can start compiling *very* early on because of custom
+ // build commands and such.
+ //
+ // In general, we try to print "Compiling" for the first nontrivial task
+ // run for a package, regardless of when that is. We then don't print
+ // out any more information for a package after we've printed it once.
+ let print = !self.ignored.contains(&pkg.get_package_id());
+ let print = print && !self.printed.contains(&pkg.get_package_id());
+ if print && (stage == StageLibraries ||
+ (total_fresh == Dirty && running.len() > 0)) {
+ self.printed.insert(pkg.get_package_id());
+ match total_fresh {
+ Fresh => try!(config.shell().verbose(|c| {
+ c.status("Fresh", pkg)
+ })),
+ Dirty => try!(config.shell().status("Compiling", pkg))
+ }
+ }
+ for msg in running.iter() {
+ try!(config.shell().verbose(|c| c.status("Running", msg)));
+ }
Ok(())
}
}
let (id, stage) = *self;
let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
let deps = resolve.deps(id).into_iter().flat_map(|a| a)
- .filter(|dep| *dep != id);
+ .filter(|dep| *dep != id)
+ .map(|dep| {
+ (dep, pkg.get_dependencies().iter().find(|d| {
+ d.get_name() == dep.get_name()
+ }).unwrap())
+ });
match stage {
- StageStart => {
- // Only transitive dependencies are needed to start building a
- // package. Non transitive dependencies (dev dependencies) are
- // only used to build tests.
- deps.filter(|dep| {
- let dep = pkg.get_dependencies().iter().find(|d| {
- d.get_name() == dep.get_name()
- }).unwrap();
- dep.is_transitive()
- }).map(|dep| {
- (dep, StageLibraries)
- }).collect()
+ StageStart => Vec::new(),
+
+ StageBuildCustomBuild => {
+ // FIXME: build dependencies should come into play here
+ vec![(id, StageStart)]
}
- StageCustomBuild => vec![(id, StageStart)],
- StageLibraries => vec![(id, StageCustomBuild)],
+
+ // When running a custom build command, we need to be sure that our
+ // own custom build command is actually built, and then we need to
+ // wait for all our dependencies to finish their custom build
+ // commands themselves (as they may provide input to us).
+ StageRunCustomBuild => {
+ let mut base = vec![(id, StageBuildCustomBuild)];
+ base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
+ .map(|(id, _)| (id, StageRunCustomBuild)));
+ base
+ }
+
+ // Building a library depends on our own custom build command plus
+ // all our transitive dependencies.
+ StageLibraries => {
+ let mut base = vec![(id, StageRunCustomBuild)];
+ base.extend(deps.filter(|&(_, dep)| dep.is_transitive())
+ .map(|(id, _)| (id, StageLibraries)));
+ base
+ }
+
+ // Binaries only depend on libraries being available. Note that they
+ // do not depend on dev-dependencies.
StageBinaries => vec![(id, StageLibraries)],
+
+ // Tests depend on all non-transitive dependencies
+ // (dev-dependencies) in addition to the library stage for this
+ // package.
StageTests => {
- let mut ret = vec![(id, StageLibraries)];
- ret.extend(deps.filter(|dep| {
- let dep = pkg.get_dependencies().iter().find(|d| {
- d.get_name() == dep.get_name()
- }).unwrap();
- !dep.is_transitive()
- }).map(|dep| {
- (dep, StageLibraries)
- }));
- ret
+ let mut base = vec![(id, StageLibraries)];
+ base.extend(deps.filter(|&(_, dep)| !dep.is_transitive())
+ .map(|(id, _)| (id, StageLibraries)));
+ base
}
}
}
(&self.old_native, &self.native),
(&self.old_fingerprint, &self.fingerprint),
(&self.old_examples, &self.examples),
+ (&self.old_build, &self.build),
]));
if self.old_root.exists() {
let _ = fs::rmdir_recursive(&self.old_native);
let _ = fs::rmdir_recursive(&self.old_fingerprint);
let _ = fs::rmdir_recursive(&self.old_examples);
+ let _ = fs::rmdir_recursive(&self.old_build);
}
}
use std::collections::{HashSet, HashMap};
use std::dynamic_lib::DynamicLibrary;
-use std::io::{fs, BufferedReader, USER_RWX};
-use std::io::fs::{File, PathExtensions};
+use std::io::{fs, USER_RWX};
+use std::io::fs::PathExtensions;
use std::os;
use core::{SourceMap, Package, PackageId, PackageSet, Target, Resolve};
// Only compile lib targets for dependencies
let targets = dep.get_targets().iter().filter(|target| {
- cx.is_relevant_target(*target)
+ target.get_profile().is_custom_build() ||
+ cx.is_relevant_target(*target)
}).collect::<Vec<&Target>>();
if targets.len() == 0 && dep.get_package_id() != resolve.root() {
//
// Each target has its own concept of freshness to ensure incremental
// rebuilds on the *target* granularity, not the *package* granularity.
- let (mut builds, mut libs, mut bins, mut tests) = (Vec::new(), Vec::new(),
- Vec::new(), Vec::new());
+ let (mut libs, mut bins, mut tests) = (Vec::new(), Vec::new(), Vec::new());
+ let (mut build_custom, mut run_custom) = (Vec::new(), Vec::new());
for &target in targets.iter() {
+ if target.get_profile().is_custom_build() {
+ // Custom build commands that are for libs that are overridden are
+ // skipped entirely
+ match pkg.get_manifest().get_links() {
+ Some(lib) => {
+ if cx.native_libs.lock().contains_key_equiv(&lib) {
+ continue
+ }
+ }
+ None => {}
+ }
+ let (dirty, fresh, freshness) =
+ try!(custom_build::prepare(pkg, target, cx));
+ run_custom.push((job(dirty, fresh), freshness));
+ }
+
let work = if target.get_profile().is_doc() {
let rustdoc = try!(rustdoc(pkg, target, cx));
vec![(rustdoc, KindTarget)]
} else {
let req = cx.get_requirement(pkg, target);
- let mut rustc = try!(rustc(pkg, target, cx, req));
-
- if target.get_profile().is_custom_build() {
- for &(ref mut work, _) in rustc.iter_mut() {
- use std::mem;
-
- let (old_build, script_output) = {
- let layout = cx.layout(pkg, KindHost);
- let old_build = layout.proxy().old_build(pkg);
- let script_output = layout.build(pkg);
- (old_build, script_output)
- };
-
- let execute_cmd = try!(custom_build::prepare_execute_custom_build(pkg,
- target,
- cx));
-
- // building a `Work` that creates the directory where the compiled script
- // must be placed
- let create_directory = proc() {
- if old_build.exists() {
- fs::rename(&old_build, &script_output)
- } else {
- fs::mkdir_recursive(&script_output, USER_RWX)
- }.chain_error(|| {
- internal("failed to create script output directory for build command")
- })
- };
-
- // replacing the simple rustc compilation by three steps:
- // 1 - create the output directory
- // 2 - call rustc
- // 3 - execute the command
- let rustc_cmd = mem::replace(work, proc(_) Ok(()));
- let replacement = proc(desc_tx: Sender<String>) {
- try!(create_directory());
- try!(rustc_cmd(desc_tx.clone()));
- execute_cmd(desc_tx)
- };
- mem::replace(work, replacement);
- }
- }
-
- rustc
+ try!(rustc(pkg, target, cx, req))
};
let dst = match (target.is_lib(),
target.get_profile().is_test(),
target.get_profile().is_custom_build()) {
- (_, _, true) => &mut builds,
+ (_, _, true) => &mut build_custom,
(_, true, _) => &mut tests,
(true, _, _) => &mut libs,
(false, false, _) if target.get_profile().get_env() == "test" => &mut tests,
}
}
- if builds.len() >= 1 {
+ if targets.iter().any(|t| t.get_profile().is_custom_build()) {
// New custom build system
- jobs.enqueue(pkg, jq::StageCustomBuild, builds);
+ jobs.enqueue(pkg, jq::StageBuildCustomBuild, build_custom);
+ jobs.enqueue(pkg, jq::StageRunCustomBuild, run_custom);
} else {
// Old custom build system
- // TODO: deprecated, remove
+ // OLD-BUILD: to-remove
let mut build_cmds = Vec::new();
for (i, build_cmd) in pkg.get_manifest().get_build().iter().enumerate() {
let work = try!(compile_custom_old(pkg, build_cmd.as_slice(), cx, i == 0));
build_cmds.push(work);
}
let (freshness, dirty, fresh) =
- try!(fingerprint::prepare_build_cmd(cx, pkg));
+ try!(fingerprint::prepare_build_cmd(cx, pkg, None));
let desc = match build_cmds.len() {
0 => String::new(),
1 => pkg.get_manifest().get_build()[0].to_string(),
for cmd in build_cmds.into_iter() { try!(cmd(desc_tx.clone())) }
dirty(desc_tx)
};
- jobs.enqueue(pkg, jq::StageCustomBuild, vec![(job(dirty, fresh),
- freshness)]);
+ jobs.enqueue(pkg, jq::StageBuildCustomBuild, vec![]);
+ jobs.enqueue(pkg, jq::StageRunCustomBuild, vec![(job(dirty, fresh),
+ freshness)]);
}
jobs.enqueue(pkg, jq::StageLibraries, libs);
Ok(())
}
-// TODO: deprecated, remove
+// OLD-BUILD: to-remove
fn compile_custom_old(pkg: &Package, cmd: &str,
cx: &Context, first: bool) -> CargoResult<Work> {
let root = cx.get_package(cx.resolve.root());
let show_warnings = package.get_package_id() == cx.resolve.root() ||
is_path_source;
let rustc = if show_warnings {rustc} else {rustc.arg("-Awarnings")};
- let build_cmd_layout = cx.layout(package, KindHost);
-
- // building the possible `build/$pkg/output` file for this local package
- let command_output_file = build_cmd_layout.build(package).join("output");
- // building the list of all possible `build/$pkg/output` files
- // whether they exist or not will be checked during the work
- let command_output_files = cx.dep_targets(package).iter().map(|&(pkg, _)| {
- build_cmd_layout.build(pkg).join("output")
- }).collect::<Vec<_>>();
+ // Prepare the native lib state (extra -L and -l flags)
+ let native_libs = cx.native_libs.clone();
+ let mut native_lib_deps = Vec::new();
+
+ // FIXME: traverse build dependencies and add -L and -l for any
+ // transitive build deps.
+ if !target.get_profile().is_custom_build() {
+ each_dep(package, cx, |dep| {
+ let primary = package.get_package_id() == dep.get_package_id();
+ match dep.get_manifest().get_links() {
+ Some(name) => native_lib_deps.push((name.to_string(), primary)),
+ None => {}
+ }
+ });
+ }
(proc(desc_tx: Sender<String>) {
let mut rustc = rustc;
- let mut additional_library_paths = Vec::new();
-
- // list of `-l` flags to pass to rustc coming from custom build scripts
- let additional_library_links = match File::open(&command_output_file) {
- Ok(f) => {
- let flags = try!(BuildOutput::parse(
- BufferedReader::new(f), name.as_slice()));
-
- additional_library_paths.extend(flags.library_paths.iter().map(|p| p.clone()));
- flags.library_links.clone()
- },
- Err(_) => Vec::new()
- };
-
- // loading each possible custom build output file to fill `additional_library_paths`
- for flags_file in command_output_files.into_iter() {
- let flags = match File::open(&flags_file) {
- Ok(f) => f,
- Err(_) => continue // the file doesn't exist, probably means that this pkg
- // doesn't have a build command
- };
-
- let flags = try!(BuildOutput::parse(
- BufferedReader::new(flags), name.as_slice()));
- additional_library_paths.extend(flags.library_paths.iter().map(|p| p.clone()));
- }
-
- for p in additional_library_paths.into_iter() {
- rustc = rustc.arg("-L").arg(p);
- }
- for lib in additional_library_links.into_iter() {
- rustc = rustc.arg("-l").arg(lib);
+ // Only at runtime have we discovered what the extra -L and -l
+ // arguments are for native libraries, so we process those here.
+ {
+ let native_libs = native_libs.lock();
+ for &(ref lib, primary) in native_lib_deps.iter() {
+ let output = &(*native_libs)[*lib];
+ for path in output.library_paths.iter() {
+ rustc = rustc.arg("-L").arg(path);
+ }
+ if primary {
+ for name in output.library_links.iter() {
+ rustc = rustc.arg("-l").arg(name.as_slice());
+ }
+ }
+ }
}
desc_tx.send_opt(rustc.to_string()).ok();
// Traverse the entire dependency graph looking for -L paths to pass for
// native dependencies.
- // TODO: deprecated, remove
+ // OLD-BUILD: to-remove
+ // FIXME: traverse build deps for build cmds
let mut dirs = Vec::new();
each_dep(package, cx, |pkg| {
if pkg.get_manifest().get_build().len() > 0 {
cmd = cmd.arg("-L").arg(dir);
}
- for &(pkg, target) in cx.dep_targets(package).iter() {
- cmd = try!(link_to(cmd, pkg, target, cx, kind));
- }
+ if target.get_profile().is_custom_build() {
+ // Custom build commands don't link to any other targets in the package,
+ // and they also link to all build dependencies, not normal dependencies
+ for &(pkg, target) in cx.build_dep_targets(package).iter() {
+ cmd = try!(link_to(cmd, pkg, target, cx, kind));
+ }
+ } else {
+ for &(pkg, target) in cx.dep_targets(package).iter() {
+ cmd = try!(link_to(cmd, pkg, target, cx, kind));
+ }
- let mut targets = package.get_targets().iter().filter(|target| {
- target.is_lib() && target.get_profile().is_compile()
- });
+ let targets = package.get_targets().iter().filter(|target| {
+ target.is_lib() && target.get_profile().is_compile()
+ });
- if target.is_bin() {
- for target in targets {
- if target.is_staticlib() {
- continue;
+ if target.is_bin() {
+ for target in targets.filter(|f| !f.is_staticlib()) {
+ cmd = try!(link_to(cmd, package, target, cx, kind));
}
-
- cmd = try!(link_to(cmd, package, target, cx, kind));
}
}
let mut search_path = DynamicLibrary::search_path();
search_path.push(layout.deps().clone());
- // TODO: deprecated, remove
+ // OLD-BUILD: to-remove
// Also be sure to pick up any native build directories required by plugins
// or their dependencies
let mut native_search_paths = HashSet::new();
let profiles = [
merge(Profile::default_dev().for_host(true).custom_build(true),
&profiles.dev),
- merge(Profile::default_release().for_host(true).custom_build(true),
- &profiles.release),
];
let name = format!("build-script-{}", cmd.filestem_str().unwrap_or(""));
let files = fs::readdir(&p.root().join("target")).assert();
let mut files: Vec<String> = files.iter().filter_map(|f| {
match f.filename_str().unwrap() {
- "examples" | "deps" => None,
+ "build" | "examples" | "deps" => None,
s if s.contains("fingerprint") || s.contains("dSYM") => None,
s => Some(s.to_string())
}
let files = fs::readdir(&p.root().join("target")).assert();
let mut files: Vec<String> = files.iter().filter_map(|f| {
match f.filename_str().unwrap() {
- "examples" | "deps" => None,
+ "build" | "examples" | "deps" => None,
s if s.contains("fingerprint") || s.contains("dSYM") => None,
s => Some(s.to_string())
}
build = build
.file("Cargo.toml", r#"
[package]
- name = "build"
+ name = "builder"
version = "0.5.0"
authors = ["wycats@example.com"]
"#)
version = "0.0.0"
authors = []
build = '{}'
- "#, build.bin("build").display()).as_slice())
+ "#, build.bin("builder").display()).as_slice())
.file("src/lib.rs", "pub fn bar() -> int { 1 }");
foo.build();
foo.root().move_into_the_past().assert();
.with_stdout(format!("\
{compiling} foo v0.5.0 ({url})
{running} `rustc build.rs --crate-name build-script-build --crate-type bin [..]`
+{running} `[..]build-script-build`
",
url = p.url(), compiling = COMPILING, running = RUNNING))
.with_stderr(format!("\
let _feat = os::getenv("CARGO_FEATURE_FOO").unwrap();
}}
"#,
- p.root().join("target").join("native").display());
+ p.root().join("target").join("build").display());
let p = p.file("bar/build.rs", file_content);
"#)
.file(".cargo/config", format!(r#"
[target.{}.foo]
- rustc-flags = "-l foo -L bar"
+ rustc-flags = "-L foo -L bar"
foo = "bar"
bar = "baz"
"#, target).as_slice())
assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0)
- .with_stdout("\
-Compiling a v0.5.0 (file://[..])
- Running `rustc [..] --crate-name a [..]`
-Compiling foo v0.5.0 (file://[..])
- Running `rustc build.rs [..]`
- Running `rustc [..] --crate-name foo [..]`
-"));
+ .with_stdout(format!("\
+{compiling} foo v0.5.0 (file://[..])
+{running} `rustc build.rs [..]`
+{compiling} a v0.5.0 (file://[..])
+{running} `rustc [..] --crate-name a [..]`
+{running} `[..]build-script-build`
+{running} `rustc [..] --crate-name foo [..] -L foo -L bar[..]`
+", compiling = COMPILING, running = RUNNING).as_slice()));
+})
+
+// Verifies that `cargo:key=value` metadata emitted by a dependency's build
+// script — where the dependency declares `links = "foo"` — is forwarded to
+// the dependent package's build script as `DEP_FOO_<KEY>` environment
+// variables (here: DEP_FOO_FOO=bar and DEP_FOO_BAR=baz).
+test!(links_passes_env_vars {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [project]
+            name = "foo"
+            version = "0.5.0"
+            authors = []
+            build = "build.rs"
+
+            [dependencies.a]
+            path = "a"
+        "#)
+        .file("src/lib.rs", "")
+        .file("build.rs", r#"
+            use std::os;
+            fn main() {
+                assert_eq!(os::getenv("DEP_FOO_FOO").unwrap().as_slice(), "bar");
+                assert_eq!(os::getenv("DEP_FOO_BAR").unwrap().as_slice(), "baz");
+            }
+        "#)
+        .file("a/Cargo.toml", r#"
+            [project]
+            name = "a"
+            version = "0.5.0"
+            authors = []
+            links = "foo"
+            build = "build.rs"
+        "#)
+        .file("a/src/lib.rs", "")
+        .file("a/build.rs", r#"
+            fn main() {
+                println!("cargo:foo=bar");
+                println!("cargo:bar=baz");
+            }
+        "#);
+
+    assert_that(p.cargo_process("build").arg("-v"),
+                execs().with_status(0)
+                       .with_stdout(format!("\
+{compiling} [..] v0.5.0 (file://[..])
+{running} `rustc build.rs [..]`
+{compiling} [..] v0.5.0 (file://[..])
+{running} `rustc build.rs [..]`
+{running} `[..]`
+{running} `[..]`
+{running} `[..]`
+{running} `rustc [..] --crate-name foo [..]`
+", compiling = COMPILING, running = RUNNING).as_slice()));
})
})
test!(plugin_with_dynamic_native_dependency {
- let build = project("build")
+ let build = project("builder")
.file("Cargo.toml", r#"
[package]
- name = "build"
+ name = "builder"
version = "0.0.1"
authors = []
[lib]
- name = "build"
+ name = "builder"
crate-type = ["dylib"]
"#)
.file("src/main.rs", r#"
[lib]
name = "bar"
plugin = true
- "#, build.bin("build").display()))
+ "#, build.bin("builder").display()))
.file("bar/src/lib.rs", format!(r#"
#![feature(plugin_registrar)]